}
}
- /// Returns the file stem for a given target/profile combo
+ /// Returns the file stem for a given target/profile combo (with metadata)
pub fn file_stem(&self, unit: &Unit) -> String {
match self.target_metadata(unit) {
Some(ref metadata) => format!("{}{}", unit.target.crate_name(),
}
}
+ /// Returns the bin stem for a given target (without metadata)
fn bin_stem(&self, unit: &Unit) -> String {
if unit.target.allows_underscores() {
unit.target.name().to_string()
}
}
+    /// Returns a tuple with the directory and name of the hard link we expect
+    /// our target to be copied to. E.g., file_stem may be out_dir/deps/foo-abcdef
+    /// and link_stem would be out_dir/foo.
+    /// This function returns it in two parts so the caller can add a prefix/suffix
+    /// to the filename separately.
+    ///
+    /// Returns an Option because in some cases we don't want to link
+    /// (e.g. a dependent lib).
pub fn link_stem(&self, unit: &Unit) -> Option<(PathBuf, String)> {
let src_dir = self.out_dir(unit);
let bin_stem = self.bin_stem(unit);
.join("index.html").exists();
} else {
for (src, link_dst, _) in try!(cx.target_filenames(unit)) {
- missing_outputs |= fs::metadata(&src).is_err();
+ missing_outputs |= !src.exists();
if let Some(link_dst) = link_dst {
- missing_outputs |= fs::metadata(link_dst).is_err();
+ missing_outputs |= !link_dst.exists();
}
}
}
}
fn filename(cx: &Context, unit: &Unit) -> String {
- // If there exists a link stem, we have to use that since multiple target filenames
- // may hardlink to the same target stem. If there's no link, we can use the original
- // file_stem (which can include a suffix)
- let file_stem = cx.link_stem(unit)
- .map(|(_path, stem)| stem)
- .unwrap_or_else(|| cx.file_stem(unit));
+ // file_stem includes metadata hash. Thus we have a different
+ // fingerprint for every metadata hash version. This works because
+ // even if the package is fresh, we'll still link the fresh target
+ let file_stem = cx.file_stem(unit);
let kind = match *unit.target.kind() {
TargetKind::Lib(..) => "lib",
TargetKind::Bin => "bin",
self.build(unit.pkg)
} else if unit.target.is_example() {
self.examples().to_path_buf()
- } else if unit.target.is_lib() {
- self.deps().to_path_buf()
} else {
self.deps().to_path_buf()
}
} else {
try!(rustc(cx, unit))
};
- let dirty = work.then(dirty);
+ let link_work1 = try!(link_targets(cx, unit));
+ let link_work2 = try!(link_targets(cx, unit));
+        // Need to link targets on both the dirty and the fresh paths
+ let dirty = work.then(link_work1).then(dirty);
+ let fresh = link_work2.then(fresh);
(dirty, fresh, freshness)
};
try!(jobs.enqueue(cx, unit, Job::new(dirty, fresh), freshness));
try!(fingerprint::append_current_dir(&dep_info_loc, &cwd));
}
- // If we're a "root crate", e.g. the target of this compilation, then we
- // hard link our outputs out of the `deps` directory into the directory
- // above. This means that `cargo build` will produce binaries in
- // `target/debug` which one probably expects.
- for (src, link_dst, _linkable) in filenames {
- // This may have been a `cargo rustc` command which changes the
- // output, so the source may not actually exist.
- debug!("Thinking about linking {} to {:?}", src.display(), link_dst);
- if !src.exists() || link_dst.is_none() {
- continue
- }
- let dst = link_dst.unwrap();
-
- debug!("linking {} to {}", src.display(), dst.display());
- if dst.exists() {
- try!(fs::remove_file(&dst).chain_error(|| {
- human(format!("failed to remove: {}", dst.display()))
- }));
- }
- try!(fs::hard_link(&src, &dst)
- .or_else(|err| {
- debug!("hard link failed {}. falling back to fs::copy", err);
- fs::copy(&src, &dst).map(|_| ())
- })
- .chain_error(|| {
- human(format!("failed to link or copy `{}` to `{}`",
- src.display(), dst.display()))
- }));
- }
-
Ok(())
}));
}
}
+/// Link the compiled target (often of form foo-{metadata_hash}) to the
+/// final target. This must happen during both "Fresh" and "Compile"
+///
+/// The target filenames are resolved up front, outside the returned
+/// closure, so the `move` closure owns them and the `Work` unit can run
+/// later without needing access to the `Context`.
+fn link_targets(cx: &mut Context, unit: &Unit) -> CargoResult<Work> {
+    let filenames = try!(cx.target_filenames(unit));
+    Ok(Work::new(move |_| {
+        // If we're a "root crate", e.g. the target of this compilation, then we
+        // hard link our outputs out of the `deps` directory into the directory
+        // above. This means that `cargo build` will produce binaries in
+        // `target/debug` which one probably expects.
+        for (src, link_dst, _linkable) in filenames {
+            // This may have been a `cargo rustc` command which changes the
+            // output, so the source may not actually exist. A `None` link
+            // destination means this file is not meant to be linked at all.
+            debug!("Thinking about linking {} to {:?}", src.display(), link_dst);
+            if !src.exists() || link_dst.is_none() {
+                continue
+            }
+            let dst = link_dst.unwrap();
+
+            debug!("linking {} to {}", src.display(), dst.display());
+            // `fs::hard_link` errors out if the destination already exists,
+            // so remove any stale file from a previous build first.
+            if dst.exists() {
+                try!(fs::remove_file(&dst).chain_error(|| {
+                    human(format!("failed to remove: {}", dst.display()))
+                }));
+            }
+            // Some filesystems don't support hard links (or src/dst may be on
+            // different mounts), so fall back to a full copy on failure.
+            try!(fs::hard_link(&src, &dst)
+                .or_else(|err| {
+                    debug!("hard link failed {}. falling back to fs::copy", err);
+                    fs::copy(&src, &dst).map(|_| ())
+                })
+                .chain_error(|| {
+                    human(format!("failed to link or copy `{}` to `{}`",
+                                  src.display(), dst.display()))
+                }));
+        }
+        Ok(())
+    }))
+}
+
/// Looks up the build-script outputs registered in the context for `unit`,
/// if any. `cloned()` only bumps the `Arc` refcount; it does not deep-copy
/// the `BuildScripts`.
fn load_build_deps(cx: &Context, unit: &Unit) -> Option<Arc<BuildScripts>> {
    cx.build_scripts.get(unit).cloned()
}
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
"));
- /* Targets should be cached from the first build
- XXX Sadly these cannot be cached since the "symlink" step is
- not separate from the "compile" step. Packages which link
- to top level binaries eg (deps/foo-abc123 -> foo) are forced
- to do an extra recompile here.
- */
+ /* Targets should be cached from the first build */
assert_that(p.cargo("build"),
execs().with_status(0)
.with_stderr("\
-[..]Compiling foo v0.0.1 ([..])
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
"));
assert_that(p.cargo("build").arg("--features").arg("foo"),
execs().with_status(0)
.with_stderr("\
-[..]Compiling foo v0.0.1 ([..])
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
"));
}
[DOCTEST] foo
"));
- /* Targets should be cached from the first build
- XXX Sadly these cannot be cached since the "symlink" step is
- not separate from the "compile" step. Packages which link
- to top level binaries eg (deps/foo-abc123 -> foo) are forced
- to do an extra recompile here.
- */
+ /* Targets should be cached from the first build */
assert_that(p.cargo("build"),
execs().with_status(0)
.with_stderr("\
-[..]Compiling foo v0.0.1 ([..])
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
"));
assert_that(p.cargo("test").arg("foo"),
execs().with_status(0)
.with_stderr("\
-[..]Compiling foo v0.0.1 ([..])
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[..]debug[..]deps[..]foo-[..][EXE]
[DOCTEST] foo
[RUNNING] `target[/]debug[/]foo[EXE]`
"));
- /* Targets should be cached from the first build
- XXX Sadly these cannot be cached since the "symlink" step is
- not separate from the "compile" step. Packages which link
- to top level binaries eg (deps/foo-abc123 -> foo) are forced
- to do an extra recompile here.
- */
+ /* Targets should be cached from the first build */
assert_that(p.cargo("run"),
execs().with_status(0)
.with_stdout("feature off")
.with_stderr("\
-[..]Compiling foo v0.0.1 ([..])
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `target[/]debug[/]foo[EXE]`
"));
execs().with_status(0)
.with_stdout("feature on")
.with_stderr("\
-[..]Compiling foo v0.0.1 ([..])
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `target[/]debug[/]foo[EXE]`
"));